% scribe: Daisy Yan Huang % lastupdate: Oct. 3, 2005 % lecture: 7 % title: Borel-Cantelli lemmas and almost sure convergence % references: Durrett, section 1.6 % keywords: Borel-Cantelli lemmas, almost sure convergence, a.s. convergence % end \documentclass[letterpaper,12pt]{article} \include{macros} \newcommand{\conv}{\longrightarrow} \begin{document} \lecture{7}{Borel-Cantelli lemmas and almost sure convergence} {Daisy Yan Huang}{yanhuang@stat.berkeley.edu} This set of notes is a revision of the work of Jin Kim, 2002. \section{Borel-Cantelli Lemmas} % keywords: Borel-Cantelli lemmas % end Recall that for real valued random variables $X_n$ and $X$, $$\begin{array}{ll} \{X_n\to X\} &=\{\omega: X_n(\omega)\to X(\omega)\} \\ &=\{\forall \epsilon > 0, |X_n-X|\leq\epsilon\ \mbox{ eventually}\} \end{array} $$ Thus, $$\begin{array}{ll} \P(X_n\to X)=1 &\Leftrightarrow \forall \epsilon>0,\ \P(|X_n-X|\leq \epsilon \ ev.)=1\\ &\Leftrightarrow\forall \epsilon>0,\ \P(|X_n-X|>\epsilon \ i.o.)=0 \end{array} $$ Let the event $A_n :=\{|X_n-X|>\epsilon\}$. Then, we are motivated by consideration of a.s. convergence to find useful conditions for $\P(A_n \mbox{ i.o.})=0$. Recall that $\{A_n \mbox{ i.o.}\}=\bigcap_n\ \bigcup_{m\geq n} A_m$. \begin{theorem}[Borel-Cantelli Lemmas] Let $(\Omega,F, \P)$ be a probability space and let $(A_n)$ be a sequence of events in F. Then, \begin{enumerate} \item If $\sum_n \P(A_n) < \infty$, then $\P(A_n \mbox{ i.o.}) = 0$. \\ \item If $\sum_n \P(A_n) = \infty$ and $A_n$ are independent, then $\P(A_n \mbox{ i.o.}) = 1$. \end{enumerate} \end{theorem} There are many possible substitutes for independence in BCL II, see Kochen-Stone Lemma. 
Before proving BCL, notice that \begin{itemize} \item $ \1(A_n \mbox{ i.o.}) = \limsup_{n \to \infty} \1( A_n ) $ \item $ \1(A_n \mbox{ ev.}) = \liminf_{n \to \infty} \1( A_n ) $ \item $ \{A_n \mbox{ i.o.}\} = \lim_{m \to \infty} ( \cup_{n > m} A_n ) \hspace{.53cm} (\mbox{note: as } m \uparrow, \; \cup_{n \ge m} A_n \downarrow \; )$ \item $ \{A_n \mbox{ ev.}\} = \lim_{m \to \infty} ( \cap_{n > m} A_n ) \hspace{.53cm} (\mbox{note: as } m \uparrow, \; \cap_{n \ge m} A_n \uparrow \; )$. \end{itemize} Therefore, \begin{align*} \P(A_n \mbox{ ev.}) & \le \liminf_{n \to \infty} \P(A_n) \hspace{.62cm} \mbox{ by Fatou's lemma }\\ & \le \limsup_{n \to \infty} \P(A_n) \hspace{.62cm} \mbox{ obvious from definition }\\ & \le \P(A_n \mbox{ i.o.}) \hspace{.62cm} \mbox{ dual of Fatou's lemma (i.e. apply to $-\P$)} \end{align*} \begin{proof} (Of BCL I) \begin{align*} \P(A_n \mbox{ i.o.}) & = \lim_{m \to \infty} \P(\cup_{n \ge m} A_n) \\ & \le \lim_{m \to \infty} \sum_{n \ge m}^\infty \P(A_n) \; = \; 0 \hspace{.32cm} \mbox{ since } \sum_{i=1}^\infty \P(A_n) < \infty . \end{align*} \end{proof} %{\bf Pf of BCL I} (Alternative method)\\ %Consider a random variable $N := \sum_\1(A_n)$, i.e. the number of events that occur. Then %$\E[N] = \sum_{n=1}^\infty \P(A_n)$ by the Monotone Convergence Theorem, and %\begin{align*} %\sum_{n=1}^\infty \P(A_n) < \infty & \Longrightarrow \E[N] < \infty\\ % & \Longrightarrow \P(N < \infty)=1 \\ % & \Longrightarrow \P(N = \infty)=0 \\ % & \Longrightarrow \P(A_n \mbox{ i.o.})=0 \hspace{.63cm} \mbox{ because }(N=\infty) \equiv ( A_n \mbox{ i.o.}) . \; \qedsymbol %\end{align*} \bigskip \begin{proof} (Of BCL II) Assume that $\Sigma \P(A_n)=\infty$ and the $A_n$'s are independent. We will show that $\P(A_n^c \mbox{ ev.}) =0$. 
\begin{align} \P(A_n^c \mbox{ ev.}) &= \lim_{n \to \infty} \P(\cap_{m \ge n} A_m^c ) \; = \; \lim_{n \to \infty} \prod_{m \ge n} \P( A_m^c ) \label{eq:BCL2a}\\ &= \lim_{n \to \infty} \prod_{m \ge n} \left( 1-\P( A_m ) \right) \; \le \; \lim_{n \to \infty} \prod_{m \ge n} \exp{(-\P( A_m ))} \label{eq:BCL2b} \\ &= \lim_{n \to \infty} \exp{\left(-\sum_{m \ge n} \P( A_m )\right)} = 0 \nonumber \end{align} since $\sum_{m \ge n} \P( A_m ) = \infty$ for every $n$, so each exponential above equals $0$.
\end{align} \end{example} \begin{proofsketch} (of \eqref{eq:randwalk:tran})\\ Let us start with $d=1$, then \begin{align} \P(S_{2n}=0) &= \P(\mbox{$n$ ``$+$'' signs and $n$ ``$-$'' signs})\\ &= \left( {2n \atop n} \right) 2^{-2n}\\ &\sim \frac{c}{\sqrt{n}} \mbox{ as } n \conv \infty . \end{align} where we used the facts that $ n ! \sim \left( {n \atop e} \right)^n \sqrt{2 \pi n}$, and that $a_n\thicksim b_n$ iff ${{a_n}\over {b_n}} \rightarrow 1 \mbox{ as }n\rightarrow \infty$. Note \begin{align} \sum \left( \frac{1}{\sqrt{n}} \right) ^d \left\{ \begin{array}{rl} = \infty & \hspace{.61cm} d=1, 2 \\ < \infty & \hspace{.61cm} d= 3, 4, \cdots \label{eq:randwalk:tran2} \end{array} \right. \end{align} Thus, $\sum_n{\P(S_{2n}=0)=\infty}$, and BC II and \eqref{eq:randwalk:tran2} together gives \eqref{eq:randwalk:tran}. \end{proofsketch} \begin{example}[for the case $d=1$] $\{S_2=0\}$ is the event of ending up back to the origin at step 2 when we started at the origin. $\P(S_{2}=0)=1/2$. Note: $$\P(S_{10,000}=0)\thicksim {c\over \sqrt{n}}\thickapprox 1/100,$$ $$\P(S_{10,002}=0)\thickapprox 1/100,$$ $$\P(S_{10,000}=0, S_{10,002}=0) =\P(S_{10,000}=0)\P(S_{10,002}=0| S_{10,000}=0) \thickapprox 1/100\cdot 1/2,$$ Later in the course, we will show that for the case $d=1$, even when the $(S_{2n}=0)$ are dependent, it is still true that $\P(S_{2n}=0 \mbox{ i.o. })=1$. \end{example} The same result holds for the case $d=2$. In general, % $$\P(S_{2n}=0)={{ { {2n} \choose n }\over {2^ {2n}} }^d} \approx {{c^d} \over {n^{d/2}}}.$$ % For $d=2$, this is $\sim {{c^2} \over {n}}$ which is not summable. Thus, $\P(S_{2n}=0 \mbox{ i.o.})=1$. For $d\geq3$, this is $\sim {{c^3} \over {n^{3/2}}}$ which is summable. Then, by {\bf BCL I}, $\P(S_{2n}=0 \mbox{ i.o.})=0$. \section{Almost sure convergence} % keywords: almost sure convergence, a.s. 
convergence % end Because \[X_n \conv X \mbox{ a.s.} \hspace{.81cm} \iff \hspace{.81cm} X_n-X \conv 0 \mbox{ a.s.} \; , \] it is enough to prove for the case of convergence to $0$. \begin{proposition} The following are equivalent: \begin{enumerate} \item $ X_n \ascv 0$ \item $ \forall \epsilon > 0,\ \P( | X_n | >\epsilon \mbox{ i.o.} ) =0 $ \item $ M_n \pcv 0$ where $M_n := \sup_{n \le k} | X_k |$ \item $ \forall\ \epsilon_n \downarrow 0 \; : \; \P( | X_n | > \epsilon_n \mbox{ i.o.} ) =0 $ \end{enumerate} \end{proposition} Note: ``$\forall$'' in Proposition 4 cannot be replaced by ``$\exists$''. For example, Let $X_n=(1/\sqrt{n})U_n$, where $U_1, U_2,...$ are independent $U[0,1]$. Take $\epsilon_n= {1/2}/{\sqrt{n}}$. Then, $\P(X_n>\epsilon_n)=\P(U_n>1/2)=1/2$. So, $\P(X_n>\epsilon_n \mbox{ i.o.})=1$. But if we take $\epsilon_n= {{1} \over {\sqrt{n}}}$. Then, $\P(X_n>\epsilon_n)=\P(U_n>1)=0$. \bigskip \begin{proof} (only for the equivalence of 1 and 3) Suppose Proposition 1 holds. If $X_n(\omega)\rightarrow 0$ a.s., then $\sup_{n \le k} | X_k(\omega) |\rightarrow 0$ a.s. But this implies that $M_n\rightarrow 0$ a.s. Thus, $M_n\pcv 0$. Conversely, if $M_n\ \downarrow \mbox{ as } n\uparrow$, then we know in advance that $M_n$ has a almost-surely-limit in $[0, \infty]$. \end{proof} \begin{lemma} If $X_n \pcv X$, then there exists a subsequence $n_k$ such that $X_{n_k} \rightarrow X$ a.s. \end{lemma} \begin{proof} It is enough to show that there exists $\epsilon_k \downarrow 0$ such that $ \sum_k{\P(|X_{n_k} - X|> \epsilon_k)}<\infty$. We can take $\epsilon_k = 1/k \mbox{ and choose } n_k$ so that $\P(|X_{n_k} - X|>1/k)\leq 1/{2^k}$. Then, $ \sum_k{\P(|X_{n_k} - X|> \epsilon_k)}<\infty$, and by {\bf BCL I} we can conclude that $X_{n_k} \rightarrow X$ a.s. \end{proof} \end{document}